var runtime.memstats
134 uses
runtime (current package)
arena.go#L799: stats := memstats.heapStats.acquire()
arena.go#L804: memstats.heapStats.release()
arena.go#L960: stats := memstats.heapStats.acquire()
arena.go#L967: memstats.heapStats.release()
heapdump.go#L709: sysFree(unsafe.Pointer(&tmpbuf[0]), uintptr(len(tmpbuf)), &memstats.other_sys)
heapdump.go#L727: sysFree(unsafe.Pointer(&tmpbuf[0]), uintptr(len(tmpbuf)), &memstats.other_sys)
heapdump.go#L730: p := sysAlloc(n, &memstats.other_sys)
iface.go#L67: m = (*itab)(persistentalloc(unsafe.Sizeof(itab{})+uintptr(len(inter.Methods)-1)*goarch.PtrSize, 0, &memstats.other_sys))
malloc.go#L755: r = (*heapArena)(h.heapArenaAlloc.alloc(unsafe.Sizeof(*r), goarch.PtrSize, &memstats.gcMiscSys))
malloc.go#L757: r = (*heapArena)(persistentalloc(unsafe.Sizeof(*r), goarch.PtrSize, &memstats.gcMiscSys))
malloc.go#L770: newArray := (*notInHeap)(persistentalloc(size, goarch.PtrSize, &memstats.gcMiscSys))
malloc.go#L995: return persistentalloc(size, align, &memstats.other_sys)
malloc.go#L1509: persistent.base = (*notInHeap)(sysAlloc(persistentChunkSize, &memstats.other_sys))
malloc.go#L1534: if sysStat != &memstats.other_sys {
malloc.go#L1536: memstats.other_sys.add(-int64(size))
mbitmap.go#L1124: x := (*[1 << 30]byte)(persistentalloc(n+1, 1, &memstats.buckhash_sys))[:n+1]
mcache.go#L162: stats := memstats.heapStats.acquire()
mcache.go#L171: memstats.heapStats.release()
mcache.go#L240: stats := memstats.heapStats.acquire()
mcache.go#L243: memstats.heapStats.release()
mcache.go#L273: stats := memstats.heapStats.acquire()
mcache.go#L275: memstats.heapStats.release()
mcache.go#L300: stats := memstats.heapStats.acquire()
mcache.go#L303: memstats.heapStats.release()
mcheckmark.go#L48: bitmap = (*checkmarksMap)(persistentalloc(unsafe.Sizeof(*bitmap), 0, &memstats.gcMiscSys))
metrics.go#L324: hist.counts[0] = memstats.gcPauseDist.underflow.Load()
metrics.go#L325: for i := range memstats.gcPauseDist.counts {
metrics.go#L326: hist.counts[i+1] = memstats.gcPauseDist.counts[i].Load()
metrics.go#L328: hist.counts[len(hist.counts)-1] = memstats.gcPauseDist.overflow.Load()
metrics.go#L598: memstats.heapStats.read(&a.heapStatsDelta)
metrics.go#L640: a.stacksSys = memstats.stacks_sys.load()
metrics.go#L641: a.buckHashSys = memstats.buckhash_sys.load()
metrics.go#L642: a.gcMiscSys = memstats.gcMiscSys.load()
metrics.go#L643: a.otherSys = memstats.other_sys.load()
metrics.go#L645: a.gcCyclesDone = uint64(memstats.numgc)
metrics.go#L646: a.gcCyclesForced = uint64(memstats.numforcedgc)
metrics.go#L650: a.mSpanSys = memstats.mspan_sys.load()
metrics.go#L652: a.mCacheSys = memstats.mcache_sys.load()
mfinal.go#L108: finc = (*finblock)(persistentalloc(_FinBlockSize, 0, &memstats.gcMiscSys))
mgc.go#L204: memstats.enablegc = true // now that runtime is initialized, GC is okay
mgc.go#L570: if !memstats.enablegc || panicking.Load() != 0 || gcphase != _GCoff {
mgc.go#L585: lastgc := int64(atomic.Load64(&memstats.last_gc_nanotime))
mgc.go#L752: memstats.gcPauseDist.record(now - work.pauseStart)
mgc.go#L904: memstats.gcPauseDist.record(now - work.pauseStart)
mgc.go#L1008: memstats.lastHeapInUse = gcController.heapInUse.load()
mgc.go#L1020: memstats.gcPauseDist.record(now - work.pauseStart)
mgc.go#L1021: atomic.Store64(&memstats.last_gc_unix, uint64(unixNow)) // must be Unix time to make sense to user
mgc.go#L1022: atomic.Store64(&memstats.last_gc_nanotime, uint64(now)) // monotonic time for us
mgc.go#L1023: memstats.pause_ns[memstats.numgc%uint32(len(memstats.pause_ns))] = uint64(work.pauseNS)
mgc.go#L1024: memstats.pause_end[memstats.numgc%uint32(len(memstats.pause_end))] = uint64(unixNow)
mgc.go#L1025: memstats.pause_total_ns += uint64(work.pauseNS)
mgc.go#L1038: memstats.gc_cpu_fraction = float64(work.cpuStats.gcTotalTime-work.cpuStats.gcIdleTime) / float64(work.cpuStats.totalTime)
mgc.go#L1055: memstats.numforcedgc++
mgc.go#L1060: memstats.numgc++
mgc.go#L1134: util := int(memstats.gc_cpu_fraction * 100)
mgc.go#L1138: print("gc ", memstats.numgc,
mgclimit.go#L282: l.lastEnabledCycle.Store(memstats.numgc + 1)
mgclimit.go#L334: l.lastEnabledCycle.Store(memstats.numgc + 1)
mgcscavenge.go#L203: gcPercentGoal := uint64(float64(memstats.lastHeapInUse) * goalRatio)
mgcscavenge.go#L792: stats := memstats.heapStats.acquire()
mgcscavenge.go#L795: memstats.heapStats.release()
mgcsweep.go#L744: stats := memstats.heapStats.acquire()
mgcsweep.go#L746: memstats.heapStats.release()
mgcsweep.go#L796: stats := memstats.heapStats.acquire()
mgcsweep.go#L799: memstats.heapStats.release()
mheap.go#L545: sp.array = sysAlloc(uintptr(n)*goarch.PtrSize, &memstats.other_sys)
mheap.go#L557: sysFree(unsafe.Pointer(&oldAllspans[0]), uintptr(cap(oldAllspans))*unsafe.Sizeof(oldAllspans[0]), &memstats.other_sys)
mheap.go#L756: h.spanalloc.init(unsafe.Sizeof(mspan{}), recordspan, unsafe.Pointer(h), &memstats.mspan_sys)
mheap.go#L757: h.cachealloc.init(unsafe.Sizeof(mcache{}), nil, nil, &memstats.mcache_sys)
mheap.go#L758: h.specialfinalizeralloc.init(unsafe.Sizeof(specialfinalizer{}), nil, nil, &memstats.other_sys)
mheap.go#L759: h.specialprofilealloc.init(unsafe.Sizeof(specialprofile{}), nil, nil, &memstats.other_sys)
mheap.go#L760: h.specialReachableAlloc.init(unsafe.Sizeof(specialReachable{}), nil, nil, &memstats.other_sys)
mheap.go#L761: h.specialPinCounterAlloc.init(unsafe.Sizeof(specialPinCounter{}), nil, nil, &memstats.other_sys)
mheap.go#L762: h.arenaHintAlloc.init(unsafe.Sizeof(arenaHint{}), nil, nil, &memstats.other_sys)
mheap.go#L779: h.pages.init(&h.lock, &memstats.gcMiscSys, false)
mheap.go#L1360: stats := memstats.heapStats.acquire()
mheap.go#L1373: memstats.heapStats.release()
mheap.go#L1504: stats := memstats.heapStats.acquire()
mheap.go#L1506: memstats.heapStats.release()
mheap.go#L1537: stats := memstats.heapStats.acquire()
mheap.go#L1539: memstats.heapStats.release()
mheap.go#L1625: stats := memstats.heapStats.acquire()
mheap.go#L1636: memstats.heapStats.release()
mheap.go#L2241: result = (*gcBitsArena)(sysAlloc(gcBitsChunkBytes, &memstats.gcMiscSys))
mprof.go#L225: b := (*bucket)(persistentalloc(size, 0, &memstats.buckhash_sys))
mprof.go#L263: bh = (*buckhashArray)(sysAlloc(unsafe.Sizeof(buckhashArray{}), &memstats.buckhash_sys))
mspanset.go#L104: newSpine := persistentalloc(newCap*goarch.PtrSize, cpu.CacheLineSize, &memstats.gcMiscSys)
mspanset.go#L316: return (*spanSetBlock)(persistentalloc(unsafe.Sizeof(spanSetBlock{}), cpu.CacheLineSize, &memstats.gcMiscSys))
mstats.go#L55: var memstats mstats
mstats.go#L341: if offset := unsafe.Offsetof(memstats.heapStats); offset%8 != 0 {
mstats.go#L392: memstats.heapStats.unsafeRead(&consStats)
mstats.go#L438: memstats.stacks_sys.load() + memstats.mspan_sys.load() + memstats.mcache_sys.load() +
mstats.go#L439: memstats.buckhash_sys.load() + memstats.gcMiscSys.load() + memstats.other_sys.load() +
mstats.go#L528: stats.StackSys = stackInUse + memstats.stacks_sys.load()
mstats.go#L530: stats.MSpanSys = memstats.mspan_sys.load()
mstats.go#L532: stats.MCacheSys = memstats.mcache_sys.load()
mstats.go#L533: stats.BuckHashSys = memstats.buckhash_sys.load()
mstats.go#L537: stats.GCSys = memstats.gcMiscSys.load() + gcWorkBufInUse + gcProgPtrScalarBitsInUse
mstats.go#L538: stats.OtherSys = memstats.other_sys.load()
mstats.go#L540: stats.LastGC = memstats.last_gc_unix
mstats.go#L541: stats.PauseTotalNs = memstats.pause_total_ns
mstats.go#L542: stats.PauseNs = memstats.pause_ns
mstats.go#L543: stats.PauseEnd = memstats.pause_end
mstats.go#L544: stats.NumGC = memstats.numgc
mstats.go#L545: stats.NumForcedGC = memstats.numforcedgc
mstats.go#L546: stats.GCCPUFraction = memstats.gc_cpu_fraction
mstats.go#L570: if cap(p) < len(memstats.pause_ns)+3 {
mstats.go#L577: n := memstats.numgc
mstats.go#L578: if n > uint32(len(memstats.pause_ns)) {
mstats.go#L579: n = uint32(len(memstats.pause_ns))
mstats.go#L588: j := (memstats.numgc - 1 - i) % uint32(len(memstats.pause_ns))
mstats.go#L589: p[i] = memstats.pause_ns[j]
mstats.go#L590: p[n+i] = memstats.pause_end[j]
mstats.go#L593: p[n+n] = memstats.last_gc_unix
mstats.go#L594: p[n+n+1] = uint64(memstats.numgc)
mstats.go#L595: p[n+n+2] = memstats.pause_total_ns
netpoll.go#L665: mem := persistentalloc(n*pdSize, 0, &memstats.other_sys)
os_linux.go#L203: stack := sysAlloc(stacksize, &memstats.stacks_sys)
proc.go#L710: lockInit(&memstats.heapStats.noPLock, lockRankLeafRank)
stack.go#L353: v := sysAlloc(uintptr(n), &memstats.stacks_sys)
stack.go#L457: sysFree(v, n, &memstats.stacks_sys)
trace.go#L511: sysFree(unsafe.Pointer(buf), unsafe.Sizeof(*buf.ptr()), &memstats.other_sys)
trace.go#L1068: buf = traceBufPtr(sysAlloc(unsafe.Sizeof(traceBuf{}), &memstats.other_sys))
trace.go#L1455: block := (*traceAllocBlock)(sysAlloc(unsafe.Sizeof(traceAllocBlock{}), &memstats.other_sys))
trace.go#L1473: sysFree(unsafe.Pointer(block), unsafe.Sizeof(traceAllocBlock{}), &memstats.other_sys)
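Nearly every site above falls into one of two patterns: heap statistics are updated between memstats.heapStats.acquire() and memstats.heapStats.release(), which bracket a consistent update to the heap counters, while non-heap (OS-level) allocations pass a pointer to one of the sysMemStat fields (stacks_sys, mspan_sys, other_sys, and so on) into sysAlloc, sysFree, or persistentalloc so the allocator can credit or debit the bytes against that bucket. The fragment below is a minimal, self-contained sketch of the second pattern only; the sysMemStat, sysAlloc, and sysFree names are reused here purely as stand-ins for the unexported runtime internals, not their actual implementations.

package main

import (
	"fmt"
	"sync/atomic"
)

// sysMemStat mimics the runtime's bytes-accounting counter. Allocators
// receive a pointer to one of these (playing the role of, say,
// &memstats.other_sys) and adjust it as memory is obtained from or
// returned to the OS.
type sysMemStat uint64

func (s *sysMemStat) load() uint64 { return atomic.LoadUint64((*uint64)(s)) }

// add applies a signed delta; a negative delta relies on two's-complement
// wraparound of the unsigned counter, so frees simply subtract.
func (s *sysMemStat) add(n int64) { atomic.AddUint64((*uint64)(s), uint64(n)) }

// sysAlloc stands in for an allocator entry point: obtain n bytes and
// credit them to the supplied stat.
func sysAlloc(n uintptr, stat *sysMemStat) []byte {
	stat.add(int64(n))
	return make([]byte, n)
}

// sysFree is the matching stand-in: return n bytes and debit the stat.
func sysFree(n uintptr, stat *sysMemStat) {
	stat.add(-int64(n))
}

func main() {
	var otherSys sysMemStat // plays the role of memstats.other_sys
	_ = sysAlloc(1<<12, &otherSys)
	buf := sysAlloc(1<<20, &otherSys)
	sysFree(uintptr(len(buf)), &otherSys)
	fmt.Println("other_sys bytes:", otherSys.load()) // 4096
}

In the runtime itself, these per-bucket counters are then folded into the exported statistics (StackSys, MSpanSys, BuckHashSys, OtherSys, ...), as the mstats.go entries listed above show.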
The pages are generated with Golds v0.6.7. (GOOS=linux GOARCH=amd64)
Golds is a Go 101 project developed by Tapir Liu.
PR and bug reports are welcome and can be submitted to the issue list.